/*
**
** File: NEURAL.C
** Description: Executes a back propagation neural network
** Platform: Windows
**
**
*/
#include <math.h>
#include <windows.h>
#include <stdio.h>
#include <stdlib.h>
#include <malloc.h>
#include "nndefs.h"
#include "datamat.h"
#include "params.h"
#include "neural.h"
#define CIRCULAR
float Random(float randz);
void Logit(const char* fmt, ...);
static char FMT_E[] = "%e ";
int fgetstr (FILE *fd, LPSTR str);
int fgetint(FILE *fd);
long fgetlong(FILE *fd);
float fgetfloat(FILE *fd);
/*
** NCreateNeural
**
** This function is called to create a neural network. Only the shell of the network
** is created; NImportNetwork completes the creation by allocating and loading the
** remaining members.
**
**
** Arguments:
**
** None
**
** Returns:
**
** NEURAL A pointer to the created neural network or a NULL if not created
*/
NEURAL *NCreateNeural( )
{
NEURAL *pN;
pN = (NEURAL*) malloc (sizeof(NEURAL));
if (pN == NULL) return NULL;
pN->m_version = REVLEVEL;
pN->m_istate = pN->m_ninputs = pN->m_noutputs = pN->m_nhidden = 0;
pN->m_params = NULL;
pN->m_sumerr2 = 0.f;
pN->m_cnt = 0;
pN->m_itmax = 2;
pN->m_dm=NULL;
pN->m_ni = NULL;
pN->m_nout = NULL;
pN->m_hinputw = NULL;
pN->m_houtputv = NULL;
pN->m_htheta = NULL;
pN->m_oinputw = NULL;
pN->m_otheta = NULL;
#ifdef CIRCULAR
pN->m_hcircw = NULL;
pN->m_ocircw = NULL;
#endif
#ifdef VERBOSE
Logit ("CNeural null constructor\n");
#endif
return pN;
}
/*
** NDeleteNeural
**
** This function is called to delete a neural network. All memory used by the network
** is freed.
**
**
** Arguments:
**
** Neural *pN A pointer to the neural network to be deleted
**
** Returns:
**
** Nothing
*/
void NDeleteNeural(NEURAL *pN )
{
#ifdef VERBOSE
Logit ("CNeural destructor\n");
#endif
if (pN->m_istate&NN_PARAMSLOADED) free (pN->m_params);
if (pN->m_noutputs) {
#ifdef VERBOSE
Logit (" Destroying in=%d hid=%d out=%d\n",pN->m_ninputs,pN->m_nhidden,pN->m_noutputs);
#endif
free_2d_floats (pN->m_hinputw, pN->m_nhidden);
free_2d_floats (pN->m_oinputw, pN->m_noutputs);
free_2d_floats (pN->m_iinputw, pN->m_noutputs);
free (pN->m_ni);
free (pN->m_houtputv);
free (pN->m_htheta);
free (pN->m_otheta);
free (pN->m_nout);
#ifdef CIRCULAR
free (pN->m_ocircw);
free (pN->m_hcircw);
#endif
}
if (pN->m_istate&NN_DYNAMIC) {
if(pN->m_noutputs) {
#ifdef VERBOSE
Logit (" Destroy dynamic\n");
#endif
free_2d_floats (pN->m_hlastdelta, pN->m_nhidden);
free_2d_floats (pN->m_olastdelta,pN->m_noutputs);
free_2d_floats (pN->m_ilastdelta,pN->m_noutputs);
free (pN->m_hlastvar);
free (pN->m_hlearn);
free (pN->m_htlearn);
free (pN->m_olastvar);
free (pN->m_otraining);
free (pN->m_olearn);
free (pN->m_otlearn);
}
}
if (pN->m_istate&NN_DATAMAT) {
#ifdef VERBOSE
Logit (" Destroy DM\n");
#endif
DDeleteDataMat(pN->m_dm);
}
free (pN);
#ifdef VERBOSE
Logit("Done deleting CNeural\n");
#endif
}
/*
** NGetROutput
**
** This function is called to get the re-scaled value of the neural network's output.
** The re-scaled value is the number in the units the user presented to the network.
** Internally the network scales all numbers to a range of 0.2 to 0.8, whereas the
** user's inputs and outputs can be of any range.
**
**
** Arguments:
**
** NEURAL *pN A pointer to the neural network structure
** const int neuron The index to the desired output
**
** Returns:
**
** float The value of the selected neural network's output
*/
float NGetROutput(NEURAL *pN, const int neuron)
{
return (DRescale(pN->m_dm, pN->m_nout[neuron],'O',neuron));
}
/*
** NGetRInput
**
** This function is called to get the re-scaled value of the neural network's input.
** The re-scaled value is the number in the units the user presented to the network.
** Internally the network scales all numbers to a range of 0.2 to 0.8, whereas the
** user's inputs and outputs can be of any range.
**
**
** Arguments:
**
** NEURAL *pN A pointer to the neural network structure
** const int neuron The index to the desired input
**
** Returns:
**
** float The value of the selected neural network's input
*/
float NGetRInput(NEURAL *pN, const int neuron)
{
return (DRescale(pN->m_dm, pN->m_ni[neuron],'I',neuron));
}
/*
** NSetRInput
**
** This function is called to set the value of the neural network's input.
** The value is scaled to the range that is optimal for this network.
**
**
** Arguments:
**
** NEURAL *pN A pointer to the neural network structure
** const int neuron The index to the desired input
** float f The un-scaled value to assign to the input
**
** Returns:
**
** Nothing
*/
void NSetRInput(NEURAL *pN, const int neuron, float f)
{
pN->m_ni[neuron] = DScale(pN->m_dm, f,'I',neuron);
}
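/*
** Example (a minimal sketch of interrogating a loaded network one value at a
** time; the indices and values are illustrative, and NInterrogate() below
** performs the equivalent steps in one call):
**
**   NSetRInput(pN, 0, 72.5f);       scale input 0 into the network's range
**   NSetRInput(pN, 1, 13.0f);
**   NFeedForward(pN);               evaluate the network
**   result = NGetROutput(pN, 0);    re-scale output 0 back to user units
*/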
/*
** NFeedForward
**
** This function is called to evaluate the selected neural network. The network inputs
** must have been loaded already. After calling this function the network's outputs can
** be retrieved.
**
**
** Arguments:
**
** NEURAL *pN A pointer to the neural network structure
**
** Returns:
**
** Nothing
*/
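/*
** For reference, the loops below implement the usual feed-forward equations
** (afunc is the activation function used throughout):
**
**   hidden:  houtputv[i] = afunc( sum_j( ni[j] * hinputw[i][j] ) + htheta[i] )
**   output:  nout[j]     = afunc( sum_i( houtputv[i] * oinputw[j][i] ) + otheta[j] )
**
** plus the optional input-to-output term (TF_NOINTOOUT) and the circular
** term (sum of squared inputs times hcircw/ocircw) when those training
** flags are set.
*/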
void NFeedForward(NEURAL *pN)
{
int i,j;
float sum1;
#ifdef CIRCULAR
float csum;
#endif
// calculate the outputs of the hidden layer
for (i=0; i < pN->m_nhidden; i++) {
for (sum1=0.f,j=0; j < pN->m_ninputs; j++)
sum1 += (pN->m_ni[j] * pN->m_hinputw[i][j] );
sum1 += pN->m_htheta[i];
#ifdef CIRCULAR
if (pN->m_params->m_TrainFlags&TF_CBPHIDDEN) {
for (csum=0.f,j=0; j < pN->m_ninputs; j++) {
csum += (pN->m_ni[j] * pN->m_ni[j] );
}
sum1 += (csum * pN->m_hcircw[i]);
}
#endif
pN->m_houtputv[i] = afunc (sum1);
}
// connect hidden layer 1 to output neurons
for (j=0; j < pN->m_noutputs; j++) {
for (sum1=0.f,i=0; i < pN->m_nhidden; i++)
sum1 += (pN->m_houtputv[i] * pN->m_oinputw[j][i]);
if (pN->m_params->m_TrainFlags & TF_NOINTOOUT) {
for (i=0; i < pN->m_ninputs; i++)
sum1 += (pN->m_ni[i] * pN->m_iinputw[j][i]);
}
sum1 += pN->m_otheta[j];
#ifdef CIRCULAR
if (pN->m_params->m_TrainFlags&TF_CBPOUTPUT) {
for (csum=0.f,i=0; i < pN->m_ninputs; i++) {
csum += (pN->m_ni[i] * pN->m_ni[i] );
// TRACE ("hi=%e csum=%e ocircw=%e\n",m_houtputv[i],csum,m_ocircw[j]);
}
sum1 += (csum * pN->m_ocircw[j]);
}
#endif
pN->m_nout[j] = afunc (sum1);
}
}
/*
** ntransl
**
** This function is used internally by the import neural network function.
**
**
** Arguments:
**
** char *cdummy A pointer to a character string
**
** Returns:
**
** int The numeric value following the leading 'N' (e.g. 12 for "N12"), or -1 if the string doesn't start with 'N'
*/
int ntransl(char *cdummy)
{
int val=0;
if (cdummy[0] != 'N') return -1;
val = atoi(&cdummy[1]);
return val;
}
/*
** NImportNetwork
**
** This function is called to import a neural network from an ENN file. This file is
** an ASCII exported file from NNMODEL.
**
**
** Arguments:
**
** NEURAL *pN A pointer to the already created network
** FILE *fd A pointer to an opened ENN file
**
** Returns:
**
** int Zero if the network was imported without error; -1 is returned on error.
*/
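/*
** The ENN file is read as a sequence of sections, each introduced by a tag of
** the form "N<number>" (see ntransl() above and the switch statement below):
**
**   N1      sizes and state (istate, ninputs, nhidden, noutputs, cnt)
**   N2      input-to-hidden weights       N3   hidden thetas
**   N4      hidden-to-output weights      N5   output thetas
**   N6-N11  dynamic training data         N12  input-to-output weights
**   N13     circular weights (CIRCULAR)   N99  DataMat section; ends the import
*/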
int NImportNetwork (NEURAL *pN, FILE *fd)
{
int i,j,sel,stat,h,o;
static char cdummy[80];
stat=ImportParams(fd,pN->m_params);
if (stat) return stat;
pN->m_sumerr2 = 0.f;
#ifdef VERBOSE
Logit("Loading neural\n");
#endif
top:
stat = fgetstr(fd,cdummy);
if (stat==EOF) goto errorexit;
sel = ntransl(cdummy);
switch (sel) {
case 99:
pN->m_dm = DCreateDataMat();
stat=DImportDataMat(pN->m_dm,fd);
#ifdef VERBOSE
Logit("Finished import NN\n");
#endif
return stat;
break;
default:
goto errorexit;
break;
case 1:
pN->m_istate = fgetint(fd);
pN->m_ninputs = fgetint(fd);
pN->m_nhidden = fgetint(fd);
pN->m_noutputs = fgetint(fd);
pN->m_cnt = fgetlong(fd);
pN->m_istate |= NN_PARAMSLOADED;
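// allocate the network arrays now that the sizes are known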
pN->m_ni = (float*) malloc (sizeof(float)*pN->m_ninputs);
pN->m_houtputv = (float*) malloc (sizeof(float)*pN->m_nhidden);
pN->m_htheta = (float*) malloc (sizeof(float)*pN->m_nhidden);
pN->m_otheta = (float*) malloc (sizeof(float)*pN->m_noutputs);
pN->m_nout = (float*) malloc (sizeof(float)*pN->m_noutputs);
pN->m_hinputw = alloc_2d_floats (pN->m_nhidden,pN->m_ninputs);
pN->m_oinputw = alloc_2d_floats (pN->m_noutputs,pN->m_nhidden);
pN->m_iinputw = alloc_2d_floats (pN->m_noutputs,pN->m_ninputs);
if (pN->m_istate&NN_DYNAMIC) {
pN->m_hlastvar = (float*) malloc (sizeof(float)*pN->m_nhidden);
pN->m_hlearn = (float*) malloc (sizeof(float)*pN->m_nhidden);
pN->m_htlearn = (float*) malloc (sizeof(float)*pN->m_nhidden);
pN->m_olastvar = (float*) malloc (sizeof(float)*pN->m_noutputs);
pN->m_otraining = (float*) malloc (sizeof(float)*pN->m_noutputs);
pN->m_olearn = (float*) malloc (sizeof(float)*pN->m_noutputs);
pN->m_otlearn = (float*) malloc (sizeof(float)*pN->m_noutputs);
pN->m_hlastdelta = alloc_2d_floats (pN->m_nhidden,pN->m_ninputs);
pN->m_olastdelta = alloc_2d_floats (pN->m_noutputs,pN->m_nhidden);
pN->m_ilastdelta = alloc_2d_floats (pN->m_noutputs,pN->m_ninputs);
}
for (i=0;i<pN->m_ninputs;i++) pN->m_ni[i] = 0.f;
for (h=0;h<pN->m_nhidden;h++) {
for (i=0;i<pN->m_ninputs;i++) pN->m_hinputw[h][i] = 0.f;
pN->m_houtputv[h] = pN->m_htheta[h] = 0.f;
}
for (o=0;o<pN->m_noutputs;o++) {
for (h=0;h<pN->m_nhidden;h++) pN->m_oinputw[o][h] = 0.f;
for (i=0;i<pN->m_ninputs;i++) pN->m_iinputw[o][i] = 0.f;
pN->m_otheta[o] = pN->m_nout[o] = 0.f;
}
break;
case 2:
for (i=0;i<pN->m_ninputs;i++)
for (j=0;j<pN->m_nhidden;j++) pN->m_hinputw[j][i]=fgetfloat(fd);
break;
case 3:
for (i=0;i<pN->m_nhidden;i++) pN->m_htheta[i]=fgetfloat(fd);
break;
case 4:
for (i=0;i<pN->m_noutputs;i++)
for (j=0;j<pN->m_nhidden;j++) pN->m_oinputw[i][j]=fgetfloat(fd);
break;
case 5:
for (i=0;i<pN->m_noutputs;i++) pN->m_otheta[i]=fgetfloat(fd);
break;
// Now load data for the dynamic part of CNeural class
case 6:
for (i=0;i<pN->m_nhidden;i++) pN->m_hlastvar[i]=fgetfloat(fd);
break;
case 7:
for (i=0;i<pN->m_nhidden;i++) pN->m_hlearn[i]=fgetfloat(fd);
break;
case 8:
for (i=0;i<pN->m_nhidden;i++) pN->m_htlearn[i]=fgetfloat(fd);
break;
case 9:
for (i=0;i<pN->m_noutputs;i++) pN->m_olastvar[i]=fgetfloat(fd);
break;
case 10:
for (i=0;i<pN->m_noutputs;i++) pN->m_olearn[i]=fgetfloat(fd);
break;
case 11:
for (i=0;i<pN->m_noutputs;i++) pN->m_otlearn[i]=fgetfloat(fd);
break;
case 12:
for (i=0;i<pN->m_noutputs;i++)
for (j=0;j<pN->m_ninputs;j++) pN->m_iinputw[i][j]=fgetfloat(fd);
break;
#ifdef CIRCULAR
case 13:
pN->m_hcircw = (float*) malloc (sizeof(float)*pN->m_nhidden);
pN->m_ocircw = (float*) malloc (sizeof(float)*pN->m_noutputs);
for (i=0;i<pN->m_nhidden;i++) pN->m_hcircw[i] = fgetfloat(fd);
for (i=0;i<pN->m_noutputs;i++) pN->m_ocircw[i] = fgetfloat(fd);
break;
#endif
}
goto top;
errorexit:
return -1;
}
/*
** LoadNetwork
**
** This function is called to create and import a neural network from an ENN file.
** This file is an ASCII exported file from NNMODEL.
**
**
** Arguments:
**
** char *filename A pointer to a character string containing the file name
**
** Returns:
**
** NEURAL A pointer to the imported network, or NULL if there was an error
*/
NEURAL *LoadNetwork (char *filename) {
FILE *fd;
int stat;
PARAMS *tparams;
NEURAL *tneural;
fd = fopen(filename,"r");
if (fd == NULL) {
Logit("File not found %s\n",filename);
return NULL;
}
tparams = (PARAMS*) malloc (sizeof(PARAMS));
tneural = NCreateNeural();	// allocates the shell and zeroes all members
tneural->m_params = tparams;
tneural->m_istate |= NN_PARAMSLOADED;
stat = NImportNetwork(tneural,fd);
fclose(fd);
if (stat) {
NDeleteNeural (tneural);
#ifdef VERBOSE
Logit ("Error importing network\n");
#endif
return NULL;
}
return tneural;
}
/*
** NInterrogate
**
** This function is called to load a neural network's inputs, evaluate the network
** and then retrieve the network's outputs.
**
**
** Arguments:
**
** NEURAL *pN A pointer to the already created network
** float *Ivec A pointer to an array of input values
** float *Ovec A pointer to an array of output values
**
** Returns:
**
** Nothing
*/
void NInterrogate(NEURAL *pN,float *Ivec,float *Ovec)
{
int i;
for (i=0; i < pN->m_ninputs; i++) pN->m_ni[i] = DScale(pN->m_dm,Ivec[i],'I',i);
NFeedForward(pN);
for (i=0; i < pN->m_noutputs; i++) Ovec[i] = DRescale(pN->m_dm,pN->m_nout[i],'O',i);
}
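/*
** Example usage (a minimal sketch; the file name "MYNET.ENN" and the 2-input,
** 1-output sizes are illustrative only, and the block is not compiled unless
** NEURAL_EXAMPLE is defined):
*/
#ifdef NEURAL_EXAMPLE
int ExampleInterrogate(void)
{
int i;
float in[2] = { 1.0f, 2.0f };          /* un-scaled input values          */
float out[1];                          /* receives the re-scaled output   */
NEURAL *pN = LoadNetwork("MYNET.ENN"); /* create and import the network   */
if (pN == NULL) return -1;             /* missing file or import error    */
NInterrogate(pN, in, out);             /* scale, feed forward, re-scale   */
for (i=0; i < 1; i++) Logit("output %d = %e\n", i, out[i]);
NDeleteNeural(pN);                     /* free all memory                 */
return 0;
}
#endif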